bitkeeper revision 1.1696 (42a81fe89VPLawDxu2WTiKLZH-Br3Q)
author sos22@douglas.cl.cam.ac.uk <sos22@douglas.cl.cam.ac.uk>
Thu, 9 Jun 2005 10:54:32 +0000 (10:54 +0000)
committer sos22@douglas.cl.cam.ac.uk <sos22@douglas.cl.cam.ac.uk>
Thu, 9 Jun 2005 10:54:32 +0000 (10:54 +0000)
Make sure the grant table code uses locked cmpxchg instructions even when
compiled with !CONFIG_SMP.

Signed-off-by: Steven Smith <sos22@cam.ac.uk>
linux-2.6.11-xen-sparse/arch/xen/kernel/gnttab.c
linux-2.6.11-xen-sparse/include/asm-xen/asm-i386/synch_bitops.h

index 8fe43247ac7ca548d1be2d249f01bedf1bef8f6b..0106a6fea04b1f7e075d89cd98fc95e15aab9b5a 100644 (file)
@@ -19,6 +19,7 @@
 #include <asm-xen/xen_proc.h>
 #include <asm-xen/linux-public/privcmd.h>
 #include <asm-xen/gnttab.h>
+#include <asm/synch_bitops.h>
 
 #if 1
 #define ASSERT(_p) \
@@ -125,7 +126,7 @@ gnttab_end_foreign_access( grant_ref_t ref, int readonly )
         if ( (flags = nflags) & (GTF_reading|GTF_writing) )
             printk(KERN_ALERT "WARNING: g.e. still in use!\n");
     }
-    while ( (nflags = cmpxchg(&shared[ref].flags, flags, 0)) != flags );
+    while ( (nflags = synch_cmpxchg(&shared[ref].flags, flags, 0)) != flags );
 
     put_free_entry(ref);
 }
@@ -172,7 +173,7 @@ gnttab_end_foreign_transfer(
      * Otherwise invalidate the grant entry against future use.
      */
     if ( likely(flags != GTF_accept_transfer) ||
-         (cmpxchg(&shared[ref].flags, flags, 0) != GTF_accept_transfer) )
+         (synch_cmpxchg(&shared[ref].flags, flags, 0) != GTF_accept_transfer) )
         while ( unlikely((frame = shared[ref].frame) == 0) )
             cpu_relax();
 
index 8093de0ac904c18fa71da660fcd54af1d9c55ce6..6af3ad2857a6f74c43204a8791ebce55319b7b2e 100644 (file)
@@ -60,6 +60,46 @@ static __inline__ int synch_test_and_change_bit(int nr, volatile void * addr)
     return oldbit;
 }
 
+/*
+ * synch_cmpxchg() -- like cmpxchg(), but the implementation below always
+ * emits a LOCK prefix, even on !CONFIG_SMP builds (see the gnttab.c hunk
+ * above: the memory being updated is shared with other parties, so the
+ * update must be atomic regardless of the kernel's own SMP-ness).
+ *
+ * The oversized dummy struct is the usual trick to make the "m" asm
+ * operand refer to an object large enough that GCC will not assume it
+ * knows the access width or cache the value in a register.
+ */
+struct __synch_xchg_dummy { unsigned long a[100]; };
+#define __synch_xg(x) ((struct __synch_xchg_dummy *)(x))
+
+/*
+ * Dispatch on the operand size and cast the result back to the
+ * pointed-to type, mirroring the interface of the stock cmpxchg().
+ */
+#define synch_cmpxchg(ptr, old, new) \
+((__typeof__(*(ptr)))__synch_cmpxchg((ptr),\
+                                     (unsigned long)(old), \
+                                     (unsigned long)(new), \
+                                     sizeof(*(ptr))))
+
+/*
+ * __synch_cmpxchg -- atomic compare-and-exchange with an unconditional
+ * LOCK prefix (unlike the plain kernel cmpxchg, which may omit it when
+ * built !CONFIG_SMP).
+ *
+ * @ptr:  location to update
+ * @old:  value expected to be at *ptr
+ * @new:  value to store if *ptr == @old
+ * @size: operand width in bytes; 1, 2 and 4 are handled
+ *
+ * Returns the value that was at *ptr before the instruction executed;
+ * the exchange took place iff that equals @old.  CMPXCHG implicitly
+ * compares against %eax, hence the "0"(old) input tied to the "=a"(prev)
+ * output.  An unsupported @size falls through and returns @old without
+ * touching memory.
+ */
+static inline unsigned long __synch_cmpxchg(volatile void *ptr,
+                                           unsigned long old,
+                                           unsigned long new, int size)
+{
+       unsigned long prev;
+       switch (size) {
+       case 1:
+               /* 8-bit operand ("b" suffix, byte-addressable reg). */
+               __asm__ __volatile__("lock; cmpxchgb %b1,%2"
+                                    : "=a"(prev)
+                                    : "q"(new), "m"(*__synch_xg(ptr)),
+                                      "0"(old)
+                                    : "memory");
+               return prev;
+       case 2:
+               /* 16-bit operand. */
+               __asm__ __volatile__("lock; cmpxchgw %w1,%2"
+                                    : "=a"(prev)
+                                    : "q"(new), "m"(*__synch_xg(ptr)),
+                                      "0"(old)
+                                    : "memory");
+               return prev;
+       case 4:
+               /* 32-bit operand. */
+               __asm__ __volatile__("lock; cmpxchgl %1,%2"
+                                    : "=a"(prev)
+                                    : "q"(new), "m"(*__synch_xg(ptr)),
+                                      "0"(old)
+                                    : "memory");
+               return prev;
+       }
+       return old;
+}
+
 static __inline__ int synch_const_test_bit(int nr, const volatile void * addr)
 {
     return ((1UL << (nr & 31)) &